-GLOBAL_ENTRY(ia64_leave_hypervisor)
+GLOBAL_ENTRY(ia64_leave_hypervisor_prepare)
PT_REGS_UNWIND_INFO(0)
/*
* work.need_resched etc. mustn't get changed by this CPU before it returns to
* user- or fsys-mode, hence we disable interrupts early on:
*/
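+ // Presumably split out of ia64_leave_hypervisor so that only exits which
+ // used VMX_SAVE_EXTRA pay for restoring r4-r7: reload eml_unat into
+ // ar.unat first, so the ld8.fill below also recovers the saved NaT bits.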
+ adds r2 = PT(R4)+16,r12
+ adds r3 = PT(R5)+16,r12
+ adds r8 = PT(EML_UNAT)+16,r12
+ ;;
+ ld8 r8 = [r8]
+ ;;
+ mov ar.unat=r8
+ ;;
+ ld8.fill r4=[r2],16 //load r4
+ ld8.fill r5=[r3],16 //load r5
+ ;;
+ ld8.fill r6=[r2] //load r6
+ ld8.fill r7=[r3] //load r7
+ ;;
+END(ia64_leave_hypervisor_prepare)
+// fall through into ia64_leave_hypervisor
+GLOBAL_ENTRY(ia64_leave_hypervisor)
+ PT_REGS_UNWIND_INFO(0)
rsm psr.i
;;
alloc loc0=ar.pfs,0,1,1,0
- adds out0=16,r12
- adds r7 = PT(EML_UNAT)+16,r12
;;
- ld8 r7 = [r7]
+ adds out0=16,r12
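+ // out0 = &pt_regs (the 16-byte scratch area sits between r12 and pt_regs)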
br.call.sptk.many b0=leave_hypervisor_tail
;;
mov ar.pfs=loc0
- mov ar.unat=r7
adds r20=PT(PR)+16,r12
;;
lfetch [r20],PT(CR_IPSR)-PT(PR)
ldf.fill f10=[r2],32
ldf.fill f11=[r3],24
;;
- ld8.fill r4=[r2],16 //load r4
- ld8.fill r5=[r3],16 //load r5
- ;;
- ld8.fill r6=[r2] //load r6
- ld8.fill r7=[r3] //load r7
- ;;
srlz.i // ensure interruption collection is off
;;
bsw.0
;;
vmx_itlb_out:
mov r19 = 1
- br.sptk vmx_dispatch_tlb_miss
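+ // the shared TLB-miss dispatcher is split; ITLB misses branch to their own path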
+ br.sptk vmx_dispatch_itlb_miss
VMX_FAULT(1);
END(vmx_itlb_miss)
;;
vmx_dtlb_out:
mov r19 = 2
- br.sptk vmx_dispatch_tlb_miss
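+ // DTLB misses take the new dispatcher below, which also spills r4-r7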
+ br.sptk vmx_dispatch_dtlb_miss
VMX_FAULT(2);
END(vmx_dtlb_miss)
srlz.i // guarantee that interruption collection is on
;;
(p15) ssm psr.i // restore psr.i
- movl r14=ia64_leave_hypervisor
+ movl r14=ia64_leave_hypervisor_prepare
;;
VMX_SAVE_REST
+ VMX_SAVE_EXTRA
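+ // matched by the ld8.fill sequence in ia64_leave_hypervisor_prepare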
mov rp=r14
;;
adds out1=16,sp //regs
br.call.sptk.many b6=vmx_vexirq
END(vmx_dispatch_vexirq)
-ENTRY(vmx_dispatch_tlb_miss)
+ENTRY(vmx_dispatch_itlb_miss)
VMX_SAVE_MIN_WITH_COVER_R19
alloc r14=ar.pfs,0,0,3,0
mov out0=cr.ifa
;;
adds out2=16,r12
br.call.sptk.many b6=vmx_hpw_miss
-END(vmx_dispatch_tlb_miss)
+END(vmx_dispatch_itlb_miss)
+ENTRY(vmx_dispatch_dtlb_miss)
+ VMX_SAVE_MIN_WITH_COVER_R19
+ alloc r14=ar.pfs,0,0,3,0
+ mov out0=cr.ifa
+ mov out1=r15
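+ // out0 = faulting address; out1 = fault number stashed in r15 by VMX_SAVE_MIN_WITH_COVER_R19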
+ adds r3=8,r2 // set up second base pointer
+ ;;
+ ssm psr.ic
+ ;;
+ srlz.i // guarantee that interruption collection is on
+ ;;
+ (p15) ssm psr.i // restore psr.i
+ movl r14=ia64_leave_hypervisor_prepare
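+ // return through the _prepare entry so r4-r7 and eml_unat get restored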
+ ;;
+ VMX_SAVE_REST
+ VMX_SAVE_EXTRA
+ mov rp=r14
+ ;;
+ adds out2=16,r12
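+ // out2 = &pt_regs, as in the ITLB variant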
+ br.call.sptk.many b6=vmx_hpw_miss
+END(vmx_dispatch_dtlb_miss)
ENTRY(vmx_dispatch_break_fault)
VMX_SAVE_MIN_WITH_COVER_R19
stf.spill [r3]=f9,32; \
;; \
stf.spill [r2]=f10,32; \
- stf.spill [r3]=f11,24; \
- ;; \
-.mem.offset 0,0; st8.spill [r2]=r4,16; \
-.mem.offset 8,0; st8.spill [r3]=r5,16; \
- ;; \
-.mem.offset 0,0; st8.spill [r2]=r6,16; \
-.mem.offset 8,0; st8.spill [r3]=r7; \
- adds r25=PT(B7)-PT(R7),r3; \
+ stf.spill [r3]=f11; \
+ adds r25=PT(B7)-PT(F11),r3; \
;; \
st8 [r24]=r18,16; /* b6 */ \
st8 [r25]=r19,16; /* b7 */ \
+ adds r3=PT(R5)-PT(F11),r3; \
;; \
st8 [r24]=r9; /* ar.csd */ \
- mov r26=ar.unat; \
- ;; \
st8 [r25]=r10; /* ar.ssd */ \
- st8 [r2]=r26; /* eml_unat */ \
;;
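+
+// Spill the preserved registers r4-r7 plus ar.unat (saved as eml_unat) into
+// pt_regs. Presumably only exits that can block or reschedule need this;
+// such paths pair VMX_SAVE_EXTRA with ia64_leave_hypervisor_prepare, which
+// restores the same state. Relies on VMX_SAVE_REST leaving r2 pointing at
+// PT(R4) and r3 at PT(R5).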
+#define VMX_SAVE_EXTRA \
+.mem.offset 0,0; st8.spill [r2]=r4,16; \
+.mem.offset 8,0; st8.spill [r3]=r5,16; \
+ ;; \
+.mem.offset 0,0; st8.spill [r2]=r6,16; \
+.mem.offset 8,0; st8.spill [r3]=r7; \
+ ;; \
+ mov r26=ar.unat; \
+ ;; \
+ st8 [r2]=r26; /* eml_unat */
+
#define VMX_SAVE_MIN_WITH_COVER VMX_DO_SAVE_MIN(cover, mov r30=cr.ifs,)
#define VMX_SAVE_MIN_WITH_COVER_R19 VMX_DO_SAVE_MIN(cover, mov r30=cr.ifs, mov r15=r19)
#define VMX_SAVE_MIN VMX_DO_SAVE_MIN( , mov r30=r0, )